use core::{
    alloc::Layout,
    marker::PhantomData,
    ptr::{self, NonNull},
};
use std::alloc::handle_alloc_error;

use crate::re::MemoryManager::TESGlobalAlloc;
use std_fork::alloc::SelflessAllocator;

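/// Growable array with the same in-memory layout as Havok's `hkArrayBase`: a
/// data pointer, a signed element count, and a `capacityAndFlags` word whose
/// low 30 bits hold the capacity and whose high bits hold ownership flags.
///
/// Allocations go through a [`SelflessAllocator`]; the default is
/// [`TESGlobalAlloc`], i.e. the game's memory manager.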
#[repr(C)]
pub struct hkArrayBase<T, A: SelflessAllocator = TESGlobalAlloc> {
    pub(super) data: Option<NonNull<T>>,
    size: i32,
    capacityAndFlags: i32,
    allocator: PhantomData<A>,
}
const _: () = assert!(core::mem::size_of::<hkArrayBase<*mut ()>>() == 0x10);

unsafe impl<T: Send, A> Send for hkArrayBase<T, A> where A: SelflessAllocator {}
unsafe impl<T: Sync, A> Sync for hkArrayBase<T, A> where A: SelflessAllocator {}

impl<T, A> Default for hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<T, A> hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    /// Low 30 bits of `capacityAndFlags`: the stored capacity.
    const CAPACITY_MASK: i32 = 0x3FFFFFFF;

    /// Growth multiplier applied by `push` when the array is full.
    const GROWTH_FACTOR: f32 = 1.5;

    /// Returns `true` if this array owns its buffer and must free it.
    const fn need_dealloc(&self) -> bool {
        const DONT_DEALLOC_FLAG: i32 = 1 << 31;
        self.capacityAndFlags & DONT_DEALLOC_FLAG == 0
    }

    /// Allocates a zeroed buffer for `new_capacity` elements, copies the first
    /// `copy_len` elements from the current buffer, and frees the old buffer if
    /// this array owns it.
    fn allocate_new_memory(&self, new_capacity: i32, copy_len: i32) -> NonNull<T> {
        let new_layout = Self::new_layout(new_capacity);

        let Ok(new_mem) = A::allocate_zeroed(new_layout) else { handle_alloc_error(new_layout) };
        let new_mem = new_mem.cast();

        if let Some(data) = self.data {
            if copy_len > 0 {
                unsafe {
                    ptr::copy_nonoverlapping(data.as_ptr(), new_mem.as_ptr(), copy_len as usize);
                }
            }
            if self.need_dealloc() {
                let old_layout = self.layout();
                unsafe { A::deallocate(data.cast(), old_layout) };
            }
        }

        new_mem
    }

    /// Clears the capacity bits of `capacityAndFlags`, keeping the flag bits.
    const fn clear_capacity_bits(&mut self) {
        self.capacityAndFlags &= !Self::CAPACITY_MASK;
    }

    /// Creates a new, empty array without allocating.
    #[inline]
    pub const fn new() -> Self {
        Self { data: None, size: 0, capacityAndFlags: 0, allocator: PhantomData }
    }

    /// Returns a raw pointer to the first element, or null if no buffer exists.
    #[inline]
    pub const fn as_ptr(&self) -> *mut T {
        match self.data {
            Some(p) => p.as_ptr(),
            None => ptr::null_mut(),
        }
    }

    /// Returns the number of elements in the array.
    #[inline]
    pub const fn len(&self) -> usize {
        self.size as usize
    }

    /// Returns the current capacity in elements (flag bits masked off).
    #[inline]
    pub const fn capacity(&self) -> i32 {
        self.capacityAndFlags & Self::CAPACITY_MASK
    }

    /// Returns `true` if the array contains no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Creates an empty array with at least the given capacity.
    #[inline]
    pub fn with_capacity(capacity: i32) -> Self {
        let mut ret = Self::new();
        ret.reserve(capacity);
        ret
    }

    /// Ensures capacity for at least `new_capacity` elements, reallocating if
    /// necessary. Does nothing if the current capacity is already sufficient.
    ///
    /// # Panics
    /// Panics if `new_capacity` exceeds `CAPACITY_MASK`.
    #[inline]
    pub fn reserve(&mut self, new_capacity: i32) {
        assert!(new_capacity <= Self::CAPACITY_MASK);
        if new_capacity <= self.capacity() {
            return;
        }

        let new_mem = self.allocate_new_memory(new_capacity, self.size);
        self.data = Some(new_mem);
        self.clear_capacity_bits();
        self.capacityAndFlags |= new_capacity & Self::CAPACITY_MASK;
    }

    /// Appends an element, growing the buffer by `GROWTH_FACTOR` when full.
    #[inline]
    pub fn push(&mut self, value: T) {
        if self.size == self.capacity() {
            let new_capacity = if self.is_empty() {
                1
            } else {
                (self.len() as f32 * Self::GROWTH_FACTOR).ceil() as i32
            };
            self.reserve(new_capacity);
        }

        unsafe {
            let end = self.as_ptr().add(self.len());
            ptr::write(end, value);
        }
        self.size += 1;
    }

    /// Removes and returns the last element, or `None` if the array is empty.
    #[inline]
    pub const fn pop(&mut self) -> Option<T> {
        let len = self.len();
        if len == 0 {
            None
        } else {
            self.size -= 1;
            unsafe { Some(ptr::read(self.as_ptr().add(len - 1))) }
        }
    }

    /// Returns a reference to the element at `index`, or `None` if out of bounds.
    #[inline]
    pub const fn get(&self, index: usize) -> Option<&T> {
        if index < self.len() {
            return unsafe { self.as_ptr().add(index).as_ref() };
        }
        None
    }

    /// Returns a mutable reference to the element at `index`, or `None` if out of bounds.
    #[inline]
    pub const fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len() {
            return unsafe { self.as_ptr().add(index).as_mut() };
        }
        None
    }

    /// Drops all elements and sets the length to zero. Capacity is unchanged.
    #[inline]
    pub fn clear(&mut self) {
        for elem in self.as_mut_slice() {
            unsafe {
                ptr::drop_in_place(elem);
            }
        }

        self.size = 0;
    }

    /// Returns a borrowing iterator over the elements.
    #[inline]
    pub const fn iter(&self) -> hkArrayRefIterator<'_, T, A> {
        hkArrayRefIterator { array: self, index: 0 }
    }

    /// Returns the elements as a shared slice.
    #[inline]
    pub const fn as_slice(&self) -> &[T] {
        match self.data {
            Some(data) => unsafe { core::slice::from_raw_parts(data.as_ptr(), self.len()) },
            None => &[],
        }
    }

    /// Returns the elements as a mutable slice.
    #[inline]
    pub const fn as_mut_slice(&mut self) -> &mut [T] {
        match self.data {
            Some(data) => unsafe { core::slice::from_raw_parts_mut(data.as_ptr(), self.len()) },
            None => &mut [],
        }
    }

    /// Returns `true` if the array contains an element equal to `value`.
    #[inline]
    pub fn contains(&self, value: &T) -> bool
    where
        T: PartialEq,
    {
        for i in 0..self.len() {
            if let Some(item) = self.get(i) {
                if item == value {
                    return true;
                }
            }
        }
        false
    }

    /// Retains only the elements for which `f` returns `true`, dropping the rest
    /// and compacting the remaining elements to the front.
    #[inline]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut retained = 0;

        for i in 0..self.len() {
            let elem = match unsafe { self.as_ptr().add(i).as_ref() } {
                Some(elem) => elem,
                None => continue,
            };

            if f(elem) {
                if retained != i {
                    unsafe {
                        let src = self.as_ptr().add(i);
                        let dst = self.as_ptr().add(retained);
                        ptr::copy_nonoverlapping(src, dst, 1);
                    }
                }
                retained += 1;
            } else {
                unsafe { ptr::drop_in_place(self.as_ptr().add(i)) };
            }
        }

        self.size = retained as i32;
    }

    /// Removes the elements in `range` and returns an iterator that yields them
    /// by value. The length is truncated up front; any elements after the range
    /// are moved back into place when the drain iterator is dropped.
    #[inline]
    pub fn drain<R>(&mut self, range: R) -> hkArrayDrain<'_, T, A>
    where
        R: core::ops::RangeBounds<usize>,
    {
        let len = self.len();
        let core::ops::Range { start, end } = stdx::slice::range(range, ..len);
        debug_assert!(start <= end);
        debug_assert!(end <= len);

        self.size = start as i32;

        hkArrayDrain {
            iter: unsafe { core::slice::from_raw_parts(self.as_ptr().add(start), end - start) }
                .iter(),
            tail_start: end,
            tail_len: len - end,
            array: unsafe { NonNull::new_unchecked(self as *mut Self) },
        }
    }

    /// Computes the allocation layout for `n` elements of `T`.
    ///
    /// # Panics
    /// Panics if the total allocation size would exceed `isize::MAX`.
    fn new_layout(n: i32) -> Layout {
        Layout::array::<T>(n as usize)
            .expect("hkArrayBase: allocation size must not exceed isize::MAX")
    }

    /// Layout of the currently allocated buffer, based on the stored capacity.
    fn layout(&self) -> Layout {
        Self::new_layout(self.capacity())
    }
}

impl<T, A> Drop for hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn drop(&mut self) {
        self.clear();
        if let Some(data) = self.data {
            // Mirror `allocate_new_memory`: only free buffers this array owns.
            if self.need_dealloc() {
                unsafe { A::deallocate(data.cast(), self.layout()) }
            }
        }
    }
}

impl<T, A> hkArrayBase<T, A>
where
    T: Clone,
    A: SelflessAllocator,
{
    /// Resizes the array to `new_len` elements. Excess elements are dropped;
    /// new slots are filled with clones of `value`. The buffer is reallocated
    /// to exactly `new_len` elements.
    ///
    /// # Panics
    /// Panics if `new_len` is negative or exceeds `CAPACITY_MASK`.
    #[inline]
    pub fn resize(&mut self, new_len: i32, value: T) {
        assert!((0..=Self::CAPACITY_MASK).contains(&new_len));
        if new_len == self.size {
            return;
        }

        if new_len < self.size {
            for i in new_len..self.size {
                unsafe {
                    let elem = self.as_ptr().add(i as usize);
                    ptr::drop_in_place(elem);
                }
            }
        }

        let new_mem = self.allocate_new_memory(new_len, self.size.min(new_len));

        if new_len > self.size {
            for i in self.len()..(new_len as usize) {
                unsafe {
                    let elem = new_mem.add(i);
                    ptr::write(elem.as_ptr(), value.clone());
                }
            }
        }

        self.data = Some(new_mem);
        self.size = new_len;
        self.clear_capacity_bits();
        self.capacityAndFlags |= new_len & Self::CAPACITY_MASK;
    }
}

/// Draining iterator returned by [`hkArrayBase::drain`]. Yields the drained
/// elements by value; on drop, any remaining drained elements are dropped and
/// the tail of the array is moved back into place.
pub struct hkArrayDrain<'a, T, A>
where
    A: SelflessAllocator,
{
    tail_start: usize,
    tail_len: usize,
    iter: core::slice::Iter<'a, T>,
    array: NonNull<hkArrayBase<T, A>>,
}

impl<T, A> Iterator for hkArrayDrain<'_, T, A>
where
    A: SelflessAllocator,
{
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next().map(|item| unsafe { ptr::read(item) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<T, A> DoubleEndedIterator for hkArrayDrain<'_, T, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back().map(|item| unsafe { ptr::read(item) })
    }
}

impl<T, A> ExactSizeIterator for hkArrayDrain<'_, T, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn len(&self) -> usize {
        self.iter.len()
    }
}

impl<T, A: SelflessAllocator> Drop for hkArrayDrain<'_, T, A> {
    fn drop(&mut self) {
        // Drop any drained elements that were never yielded.
        if core::mem::needs_drop::<T>() {
            self.for_each(drop);
        }

        // Move the tail back so the array is contiguous again, then restore its length.
        if self.tail_len > 0 {
            unsafe {
                let array = self.array.as_mut();

                let start = array.len();
                let tail = self.tail_start;
                if tail != start {
                    let ptr = array.as_ptr();
                    let src = ptr.add(tail);
                    let dst = ptr.add(start);
                    ptr::copy(src, dst, self.tail_len);
                }
                array.size = (start + self.tail_len) as i32;
            }
        }
    }
}

/// Owning iterator returned by [`hkArrayBase::into_iter`]; yields elements by value.
pub struct hkArrayIterator<T, A>
where
    A: SelflessAllocator,
{
    array: hkArrayBase<T, A>,
    index: usize,
}

impl<T, A> Iterator for hkArrayIterator<T, A>
where
    A: SelflessAllocator,
{
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            let item = unsafe { self.array.data?.add(self.index).read() };
            self.index += 1;
            Some(item)
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.array.len() - self.index;
        (len, Some(len))
    }
}

impl<T, A> Drop for hkArrayIterator<T, A>
where
    A: SelflessAllocator,
{
    fn drop(&mut self) {
        // Drop the elements that were never yielded, then mark the array empty so
        // its own destructor does not drop the already moved-out elements again.
        for i in self.index..self.array.len() {
            unsafe { ptr::drop_in_place(self.array.as_ptr().add(i)) };
        }
        self.array.size = 0;
    }
}

/// Borrowing iterator returned by [`hkArrayBase::iter`].
pub struct hkArrayRefIterator<'a, T, A>
where
    A: SelflessAllocator,
{
    array: &'a hkArrayBase<T, A>,
    index: usize,
}

impl<'a, T, A> Iterator for hkArrayRefIterator<'a, T, A>
where
    A: SelflessAllocator,
{
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            let item = unsafe { self.array.data?.add(self.index).as_ref() };
            self.index += 1;
            Some(item)
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.array.len() - self.index;
        (len, Some(len))
    }
}

/// Mutable borrowing iterator over an [`hkArrayBase`].
pub struct hkArrayIterMut<'a, T, A>
where
    A: SelflessAllocator,
{
    array: &'a mut hkArrayBase<T, A>,
    index: usize,
}

impl<'a, T, A> Iterator for hkArrayIterMut<'a, T, A>
where
    A: SelflessAllocator,
{
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            unsafe {
                let ptr = self.array.as_ptr().add(self.index);
                self.index += 1;
                Some(&mut *ptr)
            }
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.array.len() - self.index;
        (len, Some(len))
    }
}

impl<T, A> IntoIterator for hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    type Item = T;
    type IntoIter = hkArrayIterator<T, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        hkArrayIterator { array: self, index: 0 }
    }
}

impl<'a, T, A> IntoIterator for &'a hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    type Item = &'a T;
    type IntoIter = hkArrayRefIterator<'a, T, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        hkArrayRefIterator { array: self, index: 0 }
    }
}

impl<'a, T, A> IntoIterator for &'a mut hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    type Item = &'a mut T;
    type IntoIter = hkArrayIterMut<'a, T, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        hkArrayIterMut { array: self, index: 0 }
    }
}

impl<T, A> Extend<T> for hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        for elem in iter {
            self.push(elem);
        }
    }
}

impl<T: core::fmt::Debug, A: SelflessAllocator> core::fmt::Debug for hkArrayBase<T, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("hkArrayBase")
            .field("data", &self.iter().collect::<Vec<_>>())
            .field("size", &self.size)
            .field("capacityAndFlags", &self.capacityAndFlags)
            .finish()
    }
}

impl<T: Clone, A: SelflessAllocator> Clone for hkArrayBase<T, A> {
    fn clone(&self) -> Self {
        let mut new = Self::new();
        new.reserve(self.capacity());
        for i in 0..self.len() {
            unsafe {
                let ptr = self.as_ptr().add(i);
                new.push((*ptr).clone());
            }
        }
        new
    }
}

impl<T: PartialEq, A: SelflessAllocator> PartialEq for hkArrayBase<T, A> {
    fn eq(&self, other: &Self) -> bool {
        if self.len() != other.len() {
            return false;
        }
        for i in 0..self.len() {
            unsafe {
                let a = self.as_ptr().add(i);
                let b = other.as_ptr().add(i);
                if *a != *b {
                    return false;
                }
            }
        }
        true
    }
}

impl<T: Eq, A: SelflessAllocator> Eq for hkArrayBase<T, A> {}

impl<T: PartialOrd, A: SelflessAllocator> PartialOrd for hkArrayBase<T, A> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        for i in 0..self.len().min(other.len()) {
            unsafe {
                let a = self.as_ptr().add(i);
                let b = other.as_ptr().add(i);
                match (*a).partial_cmp(&*b) {
                    Some(core::cmp::Ordering::Equal) => {}
                    non_eq => return non_eq,
                }
            }
        }
        self.len().partial_cmp(&other.len())
    }
}

impl<T: Ord, A: SelflessAllocator> Ord for hkArrayBase<T, A> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        for i in 0..self.len().min(other.len()) {
            unsafe {
                let a = self.as_ptr().add(i);
                let b = other.as_ptr().add(i);
                match (*a).cmp(&*b) {
                    core::cmp::Ordering::Equal => {}
                    non_eq => return non_eq,
                }
            }
        }
        self.len().cmp(&other.len())
    }
}

impl<T: core::hash::Hash, A: SelflessAllocator> core::hash::Hash for hkArrayBase<T, A> {
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        for i in 0..self.len() {
            unsafe {
                // Hash the element itself, not the raw pointer's address,
                // so that equal arrays hash identically.
                let ptr = self.as_ptr().add(i);
                (*ptr).hash(state);
            }
        }
    }
}

impl<T, A> core::ops::Index<usize> for hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    type Output = T;

    #[inline]
    fn index(&self, index: usize) -> &Self::Output {
        assert!(index < self.len(), "Index out of bounds");
        unsafe { self.as_ptr().wrapping_add(index).as_ref().expect("Index out of bounds") }
    }
}

impl<T, A> core::ops::IndexMut<usize> for hkArrayBase<T, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        assert!(index < self.len(), "Index out of bounds");
        unsafe { self.as_ptr().wrapping_add(index).as_mut().expect("Index out of bounds") }
    }
}
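
// Illustrative usage sketch. `TESGlobalAlloc` presumably routes through the
// game's MemoryManager, so this test is ignored by default and mainly documents
// the intended API; run it only in an environment where the default allocator
// is actually usable.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    #[ignore = "requires the game's memory manager to back TESGlobalAlloc"]
    fn push_pop_drain_roundtrip() {
        let mut array: hkArrayBase<i32> = hkArrayBase::new();
        array.extend([1, 2, 3, 4, 5]);
        assert_eq!(array.len(), 5);
        assert_eq!(array.pop(), Some(5));

        // Drain the middle; the tail is moved back when the drain guard drops.
        let drained: Vec<i32> = array.drain(1..3).collect();
        assert_eq!(drained, vec![2, 3]);
        assert_eq!(array.as_slice(), &[1, 4]);

        // Keep only even values.
        array.retain(|&x| x % 2 == 0);
        assert_eq!(array.as_slice(), &[4]);
    }
}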